%res
%load_ext autoreload
%autoreload 2
from additive.utility import *
from additive.features import *
client = Client("tcp://10.142.0.26:8786")
client.restart()
# Input .info dumps for experiment 03 (as-built and polished specimens).
# NOTE(review): the first entry lives under additive_data/ while every other
# entry is under additive_project/data/ -- confirm that prefix is intentional.
# Fixed: one path previously read "additive_proiject" (typo) and could never
# resolve on disk.
files = ['/home/ben_rasoolov/additive_data/experiment_03/V17_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
         '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
from scipy.ndimage import sobel
import cv2
def get_top_and_bottom(x):
    """Return a binary mask covering the two largest contours of the
    dilated complement of *x* (used to isolate the top and bottom regions
    of a surface image).

    Parameters
    ----------
    x : 2-D array
        0/1 (or boolean) image; nonzero pixels are dilated before inversion.

    Returns
    -------
    2-D uint8 array of the same shape, 1 inside the two largest contours.
    """
    # Dilate with a 40x40 kernel, then invert so the background becomes
    # the foreground that findContours traces.
    mask = 1 - cv2.dilate(x.astype('uint8'), np.ones((40, 40)))
    # OpenCV 3 returns (image, contours, hierarchy); OpenCV 4 returns
    # (contours, hierarchy).  Unpacking from the right works with both,
    # unlike the previous fixed 3-tuple unpack.
    *_, contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Keep the two longest contours (presumably the two specimen edges).
    good_contours = sorted(contours, key=lambda c: -len(c))[:2]
    print([len(c) for c in good_contours])
    imt = np.zeros_like(mask)
    cv2.drawContours(imt, good_contours, -1, 1, -1)  # thickness=-1 fills
    plt.imshow(imt)
    plt.show()
    return imt
from additive.utility import dfe
from additive.preprocessing import load_and_process_image
def get_heatmap(file_name):
    # Render a clamped jet-colormap PNG of the surface image, saved next to
    # the source .info file with the same base name.
    # d = directory, f = base name, e = extension (project helper `dfe`).
    d, f, e = dfe(file_name)
    image = load_and_process_image(file_name, transform_fun=lambda x: x['value'].x)
    # NOTE(review): no .copy() here, so the clamping below mutates `image`
    # in place; harmless only because `image` is loaded fresh in this call.
    sub_image_whole = image#.copy()#[500:-500, 500:-500]
    #sub_image = sub_image_whole[500:-500:20, 500:-500:20].copy()
    #fig, axes = plt.subplots(figsize=(15, 10))
    # Statistics come from the central region only (500-px border excluded).
    mu = sub_image_whole[500:-500, 500:-500].mean()
    std = sub_image_whole[500:-500, 500:-500].std()
    # Clamp everything above mu-std or below mu-4*std to mu-std, compressing
    # the display range before color-mapping.
    cond = (sub_image_whole>mu-std) | (sub_image_whole < mu-4*std)
    sub_image_whole[np.where(cond)] = mu-std
    #sub_image_whole[np.where(get_top_and_bottom(cond))] = mu
    #plt.imshow(sub_image_whole[500:-500, 100:-100], cmap='jet')
    #plt.axis('off')
    # Save the cropped, clamped image as <dir><name>.png with the jet map.
    plt.imsave(d+f+'.png', sub_image_whole[500:-500, 100:-100], cmap='jet')
from multiprocessing import Pool
with dask.config.set(pool=Pool(8)):
bag.from_sequence(files).map(get_heatmap).compute()
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*png")
for file in files:
image = cv2.imread(file)
image2 = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
cv2.imwrite(file, image2)
fig, axes = plt.subplots(figsize=(10, 10))
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*png")
file = np.random.choice(files)
plt.imshow(cv2.imread(file))
%reset -f
%load_ext autoreload
%autoreload 2
from additive.utility import *
from additive.features import *
files = [
'..periment_03/V17_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
'../data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
'../data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
'../data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
'../data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
file = np.random.choice(files)
file = '../data/experiment_03/V11_T1_Right(Top)_500X_3D.info'
print(file)
tmp = joblib.load(file)
try:
image = np.array(tmp['value'].x)
except:
image = tmp
%matplotlib notebook
# create the x and y coordinate arrays (here we just use pixel indices)
from additive.experimental import plot_3d_surface
from additive.preprocessing import process_image
tilter = process_image(image, degree=2)
tilter.mean(), image.mean()
fig, axes = plt.subplots(figsize=(8, 8))
ax = fig.gca(projection='3d')
plot_3d_surface(tilter, step=70, ax=ax, cmap='jet')
plot_3d_surface(image+500, step=70, ax=ax, cmap='jet')
# plt.savefig(f"/home/ben_rasoolov/additive_data/figures/3d_tilted_vs_original_{f}.png", dpi=300)
%matplotlib inline
font = {'size' : 15}
matplotlib.rc('font', **font)
fig, axes = plt.subplots(figsize=(15, 3))
axis=1
t1 = image[500:-500, 500:-500]
t2 = tilter[500:-500, 500:-500]
t1 = t1 - t1.mean()
t2 = t2 - t2.mean()
plt.plot(np.arange(t1.shape[1-axis]), t1.mean(axis=axis), label="Original", linewidth=3)
plt.plot(np.arange(t2.shape[1-axis]), t2.mean(axis=axis), label="Tilted & aligned", linewidth=3)
axes.legend()
d, f, e = dfe(file)
plt.savefig(f"../data/figures/side_view_tilted_vs_original_{f}.png", dpi=300)
import tensorflow as tf
%load_ext autoreload
%autoreload 2
import numpy as np
import joblib
import glob
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*info")
file_name = np.random.choice(files)
print(file_name)
data = joblib.load(file_name)
image = np.array(data['value'].x)
from additive.preprocessing import *
thresh = image.mean()-image.std()
get_image_alignment_slope(image[:, 300:-300], thresh)#, image.mean()-image.std())
rotated_image = correct_aligment(image, 300)
get_image_alignment_slope(rotated_image[:, 300:-300], 0)
import matplotlib.pyplot as plt
plt.imshow(rotated_image)
from additive.preprocessing import adjust_tilt
tilted_rotated_image = adjust_tilt(rotated_image[1000:-1000, 300:-300], 8)
tilted_rotated_image.max()
plt.imshow(tilted_rotated_image)
plt.imshow(rotated_image[1000:-1000, 300:-300])
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
from functools import reduce
from operator import or_
def isin(values, text, ignore_case=False):
    """Return True if any string in *values* occurs as a substring of *text*.

    Parameters
    ----------
    values : iterable of str
        Candidate substrings.
    text : str
        String to search in.
    ignore_case : bool, optional
        Compare case-insensitively when True.

    Returns
    -------
    bool
        True when at least one candidate is found.  Unlike the previous
        reduce(or_, ...) version, an empty *values* returns False instead
        of raising TypeError, and the search short-circuits on first hit.
    """
    if ignore_case:
        lowered = text.lower()  # hoisted: lowercase the haystack once
        return any(v.lower() in lowered for v in values)
    return any(v in text for v in values)
################### ################### ################### ###################
# version 1: all files
#files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*info")
#chosen_files = [f for f in files if isin(['v18', 'v12', 'v14', 'v13', 'v11', 'v17'], f, ignore_case=True)]
# version 2: v19 repeated measurements
################### ################### ################### ###################
info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
chosen_files = info_files + [x.replace("_2", "") for x in info_files]
len(chosen_files)
def get_features(img, features=None):
    """Evaluate the registered feature functions on *img*.

    When *features* is None every function in the module-level
    `feature_funs` mapping runs; otherwise only the ones whose name is
    listed in *features* are evaluated.
    """
    return {name: fn(img)
            for name, fn in feature_funs.items()
            if features is None or name in features}
from multiprocessing import Pool
import dask
def get_features_from_files(chosen_files, n_procs=8):
    # Lazy dask-bag pipeline: load and preprocess each file, then evaluate
    # all registered feature functions on the resulting image.
    images = bag.from_sequence(chosen_files)
    # Extra keyword args here are forwarded by bag.map to
    # load_and_process_image for every element.
    preprocessed_images = images.map(load_and_process_image,
        transform_fun=lambda data: np.array(data['value'].x), crop_size=(500, 300))
    features = preprocessed_images.map(get_features)
    # Run on a multiprocessing pool, capped at the number of files so no
    # worker sits idle; the `with` blocks close the pool and restore the
    # previous dask scheduler configuration.
    with Pool(min(len(chosen_files), n_procs)) as p:
        with dask.config.set(pool=p):
            out = features.compute()
    return out
out = get_features_from_files(chosen_files)
import pandas as pd
from additive.utility import get_file_info
df = pd.DataFrame(dict(zip(chosen_files, out))).T
df.index = df.index.map(lambda x: x.split("/")[-1].split(".")[0]).rename("file")
df = df.reset_index()
df = get_file_info(df['file']).join(df)
df
res = df.iloc[:4, 5:].values - df.iloc[4:, 5:].values
from scipy.stats import t
t_value = t.ppf(.95, len(res)-1)
result = ((res.mean(axis=0) - res.std(axis=0)/2*t_value)>0) | ((res.mean(axis=0) + res.std(axis=0)/2*t_value)<0)
p1 = t.cdf(res.mean(axis=0) - res.std(axis=0)/2*t_value, len(res)-1)
p2 = 1 - t.cdf(res.mean(axis=0) + res.std(axis=0)/2*t_value, len(res)-1)
pvalue = np.minimum(p1, p2)
pvalue
pd.concat([pd.Series(result.reshape(-1), name='reject_null'),
pd.Series(pvalue.reshape(-1), name='pvalue').round(decimals=2)], axis=1)
#df.to_csv("/home/ben_rasoolov/additive_data/paper/global_stats_tilted_rotated_cropped_v02.csv")
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
#client = Client("tcp://10.142.0.26:8786")
#client = Client(scheduler_file="/home/ben_rasoolov/sched")
#client.restart()
df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v02.csv")
res = df.groupby(['specimen', 'ispolished']).agg(['mean'])
tmp = df.sort_values(['ispolished', 'specimen', 'T', 'RL'])
r = tmp[tmp['RL'] == 'R']
l = tmp[tmp['RL'] == 'L']
rl = r.merge(l, on=['ispolished', 'specimen', 'T'], suffixes=["_r", "_l"])
cols = ['ra_1d', 'rq_1d',
'rsk_1d', 'rku_1d', 'rp_1d', 'rv_1d', 'ra_2d', 'rq_2d',
'rp_2d', 'rv_2d', 'rsk_2d', 'rku_2d']
col = np.random.choice(cols)
for col in cols:
m = (rl[col+"_r"] - rl[col+"_l"]).std()/np.sqrt(len(rl))
t1 = (rl[col+"_r"] - rl[col+"_l"]).mean() - 1.75 * m
t2 = (rl[col+"_r"] - rl[col+"_l"]).mean() + 1.75 * m
print(col, t1, t2)
df
%reset -f
%load_ext autoreload
%autoreload 2
from additive.features import Features
from additive.utility import dfe
from imports import *
root_dir = "/home/ben_rasoolov/additive_project/data/"
def get_undone_files(big_file_dir, info_file_dir):
    """List .pd paths whose base name has a big file but no .info file yet.

    *big_file_dir* and *info_file_dir* are glob patterns; the returned
    paths live in the big-file directory and carry the .pd extension.
    """
    out_dir = dfe(big_file_dir)[0]
    big_stems = {dfe(path)[1] for path in glob.glob(big_file_dir)}
    done_stems = {dfe(path)[1] for path in glob.glob(info_file_dir)}
    return [f"{out_dir}/{stem}.pd" for stem in big_stems - done_stems]
undone_files = get_undone_files(root_dir+"original_images/*pd", root_dir+"experiment_03/*info")
undone_files
from additive.preprocessing import gkern2d
from scipy.ndimage import zoom, convolve
def get_all_stats(files, n_processes=4):
    """Resize, smooth, and compute Features statistics for each dumped image.

    Parameters
    ----------
    files : list of str
        Paths to joblib-dumped image arrays.
    n_processes : int, optional
        Upper bound on worker processes (capped at len(files)).

    Returns
    -------
    list
        One run_all_tests() result per input file, in input order.
    """
    # Fixed per-axis scale factors; images are shrunk by 1/s.
    s = np.array([2.330435, 2.33016])
    # 31x31 Gaussian kernel (sigma=5), normalized to sum to 1.
    M = 31
    _ = gkern2d(M, 5)
    k3 = _ / _.sum()
    original_images = bag.from_sequence(files).map(joblib.load)
    resized_images = original_images.map(zoom, zoom=1/s)
    smoothed_images = resized_images.map(convolve, weights=k3)
    c = smoothed_images.map(Features).map(lambda x: x.run_all_tests())
    from multiprocessing import Pool
    # Context-manage the pool so worker processes are reclaimed even when
    # compute() raises (the previous version created the Pool inline and
    # never closed it).
    with Pool(min(n_processes, len(files))) as pool:
        with dask.config.set(pool=pool):
            all_stats = c.compute()
    return all_stats
all_stats = get_all_stats(undone_files)
for name, value in zip(undone_files, all_stats):
dic = {'name': name, 'variation': 'Normal', 'value': value}
d, f, e = dfe(name)
path = root_dir+"experiment_03/"+f+".info"
print(path)
if os.path.exists(path):
print(f"path {path} exists")
continue
joblib.dump(dic, path)
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
from additive.features import Features
root = "/home/ben_rasoolov/additive_project/data/dataset"
file_names = set(glob.glob(f'{root}/*_3d.features')) - {f'{root}/v06_T2_L_3d.features'}
len(file_names)
images_mapper = [
'Original', # lambda x, #x,
'25% area', # lambda x, # random_sub_image(x, .5),
'6.25% area', # lambda x, # random_sub_image(x, .25),
'50% length', # lambda x, # random_sub_length_image(x, .5),
'25% length', # lambda x, # random_sub_length_image(x, .25),
# '50% length rotate', # lambda x, # align_image(random_sub_length_image(x)),
# '50% length tilt', # lambda x, # adjust_tilt(random_sub_length_image(x)),
'Tilted', # lambda x, # adjust_tilt(x),
'Tilted & Rotated', # lambda x, # adjust_tilt(align_image(x))
'50% width', # lambda x, # random_sub_width_image(x),
'25% width', # lambda x, # random_sub_width_image(x, .25),
# '50% width tilt', # lambda x, # adjust_tilt(random_sub_width_image(x)),
# '50% random', # lambda x, # random_sub_profile_image(x, .50),
]
indices = [0, 1, 2, 3, 4, 7, 8]
out = []
def get_all_stats(file_name):
    """Load one pre-computed .features dump and flatten its statistics.

    Concatenates the statistics tables for the variation indices selected
    in the module-level `indices`, labelling each row with its variation
    name (from `images_mapper`) and the bare file name.
    """
    res = joblib.load(file_name)
    print(file_name)
    frames = []
    for i in indices:
        frames.append(res[i].statistics.assign(Variation=images_mapper[i]))
    combined = pd.concat(frames)
    return combined.assign(file=file_name.split("/")[-1])
with Pool(8) as p:
out = p.map(get_all_stats, file_names)
stats = pd.concat(out)
stats.to_csv("/home/ben_rasoolov/additive_project/data/paper/stats.csv", index=False)
cond = stats['Variation'] != '6.25% area'
data = stats[cond].groupby(['file', 'Variation']).mean().reset_index()
data = data.set_index(['Variation', 'file']).stack().rename('Statistic Value($\mu m$)').reset_index()\
.rename(columns = {"level_2": "Measure"})
sns.set_palette("deep")
ax = sns.catplot(data=data, kind='bar', height=8.27, aspect=11.7/8.27,
x='Measure', y='Statistic Value($\mu m$)', hue='Variation')
#savefig('global_measure_comparison.svg')
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
from additive.feature_functions import feature_functions_functions as feature_functions
files = ['/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
# Registry of (name, function) image-variation pairs; each function
# decorated with @var_fun below is appended in definition order.
variations = []
def var_fun(fun):
    """Decorator: register *fun* in `variations` under its own name."""
    variations.append((fun.__name__, fun))
    return fun

@var_fun
def original(img):
    """Identity variation: the image untouched."""
    return img

@var_fun
def half_area_center(img):
    """Central crop spanning half of each dimension (quarter of the area)."""
    w, h = img.shape
    cw, ch = w//2, h//2
    #x, y = np.random.randint(max(1,w-ww)), np.random.randint(max(1,h-hh))
    return img[cw-w//4:cw+w//4, ch-h//4:ch+h//4]

@var_fun
def half_length_center(img):
    """Central crop of half the columns, keeping all rows."""
    w, h = img.shape
    ch = h//2  # unused cw removed
    return img[:, ch-h//4:ch+h//4]

@var_fun
def half_width_center(img):
    """Central crop of half the rows, keeping all columns."""
    w, h = img.shape
    cw = w//2  # unused ch removed
    return img[cw-w//4:cw+w//4, :]

@var_fun
def random_profiles(img, ratio=.5):
    """Keep each row (profile) independently with probability *ratio*.

    Nondeterministic: draws from the global numpy RNG on every call.
    """
    return img[np.random.rand(len(img))<ratio]

@var_fun
def half_length_left(img, ratio=.5):
    """Left half of the columns.  *ratio* is unused (kept for signature
    parity with the other variations)."""
    w, h = img.shape
    return img[:, :h//2]

@var_fun
def half_length_right(img, ratio=.5):
    """Right half of the columns.  *ratio* is unused."""
    w, h = img.shape
    return img[:, h//2:]

@var_fun
def half_width_top(img, ratio=.5):
    """Top half of the rows.  *ratio* is unused."""
    w, h = img.shape
    return img[:w//2, :]

@var_fun
def half_width_bottom(img, ratio=.5):
    """Bottom half of the rows.  *ratio* is unused."""
    w, h = img.shape
    return img[w//2:, :]
my_funs = [(k, v) for k, v in feature_functions.items() if '1d' in k]
def prepare_image(data):
    # Crop a 500-px border, then evaluate every registered 1-D feature
    # function (`my_funs`) on every registered image variation.
    image = np.array(data['value'].x)[500:-500, 500:-500]
    out = {}
    for variation, var_fun in variations:
        for feature, feature_fun in my_funs:
            # NOTE(review): the variation is recomputed for every feature.
            # For deterministic variations this is only wasted work, but
            # for random ones (e.g. random_profiles) each feature sees a
            # *different* crop -- confirm this is intended before hoisting
            # the call out of the inner loop.
            x = var_fun(image)
            print(x.shape)
            # Keys are (variation name, feature name) tuples.
            out[variation, feature] = feature_fun(x)
    return out
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
from additive.utility import dfe
from sh import wget
links = {
"V19_T1_Left(Bottom)_500X_3D_2.csv": "https://auburn.box.com/shared/static/0iy6dopmmq8c8owpkjc1rddn48czeam1.csv",
"V19_T1_Right(Top)_500X_3D_2.csv": "https://auburn.box.com/shared/static/i39htg5ku0wxphc9l6yebe63g7qatsd9.csv",
"V19_T2_Left(Bottom)_500X_3D_2.csv": "https://auburn.box.com/shared/static/856x1lsjqr0ldl7dlesv1rsyzdw84b3x.csv",
"V19_T2_Right(Top)_500X_3D_2.csv": "https://auburn.box.com/shared/static/6kykrfjuwiounoduupk4yebuq2ah4k80.csv"
}
#for name, link in links.items():
# wget(link, "-O", "/home/ben_rasoolov/additive_project/data/original_images/"+name)
csv_files = glob.glob("/home/ben_rasoolov/additive_project/data/original_images/*csv")
csv_files
def proces_csv_files(file):
    """Convert one raw CSV height map to a float32 joblib dump (.pd).

    The output is written next to the input with the same base name.
    """
    print(file)
    directory, stem, _ext = dfe(file)
    table = pd.read_csv(file, header=None)
    joblib.dump(table.values.astype('float32'), directory + stem + ".pd")
with Pool(4) as p:
p.map(proces_csv_files, csv_files)
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
from additive.utility import dfe, get_file_info
from sh import wget
from numpy import genfromtxt
info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
info_files = info_files + [x.replace("_2", "") for x in info_files]
def get_stats_from_file(data):
    """Return (statistics, circle_statistics) for a Features result.

    *data* may be the result object itself or a path to a joblib .info
    dump, in which case its 'value' entry is loaded first.
    """
    if isinstance(data, str):
        data = joblib.load(data)['value']
    stats = data.statistics
    circle_stats = data.circle_statistics#.describe()
    return stats, circle_stats
with Pool(8) as p:
all_stats_ = p.map(get_stats_from_file, info_files)
global_stats_ = [x for x, y in all_stats_]
circle_stats_ = [y for x, y in all_stats_]
global_stats = pd.concat([x.assign(file=dfe(f)[1]) for x, f in zip(global_stats_, info_files)])\
.reset_index()#.drop('index', axis=1)#.rename(columns={"index": "measure"})
global_stats['repetition'] = global_stats['file'].str.contains("_2") * 1 + 1
global_stats['file'] = global_stats['file'].str.replace("_2", '')
global_stats
%matplotlib inline
fig, ax = plt.subplots(figsize=(15, 8))
#tt = t.stack().reset_index().rename(columns={'level_2': 'measure', 0: 'value'})
sns.barplot(data=global_stats, x='file', y='ra', hue='repetition')
from scipy.stats import ttest_ind
for col in ['rho', 'ra', 'rv', 'rz', 'rq']:
if col not in global_stats.columns:
continue
for file in np.unique(global_stats['file']):
cond1 = (global_stats['repetition'] == 1) & (global_stats['file'] == file)
cond2 = (global_stats['repetition'] == 2) & (global_stats['file'] == file)
ra = global_stats[col]
pvalue = ttest_ind(ra[cond1], ra[cond2]).pvalue
if pvalue > .05:
print(file, col, pvalue)
s = np.array([2.330435, 2.33016])
1/s
# Todo:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
_ = [x.split('\t') for x in """Surface Condition specimen Frequency (Hz) Strain Amplitude (mm/mm) Fatigue Life (cylces) Reversal to failure (2Nf)
As-built V02 5 0.004 45859 91718
As-built V10 5 0.004 49177 98354
As-built V04 7.5 0.003 91222 182444
As-built V16 7.5 0.003 110013 220026
As-built V08 7.5 0.003 136464 272928
As-built V12 8 0.0025 192404 384808
As-built V18 8 0.0025 259128 518256
As-built V14 8 0.0025 320856 641712
As-built V06 10 0.002 5000000 10000000
Half-polished V07 5 0.004 50916 101832
Half-polished V09 5 0.004 60992 121984
Half-polished V15 7.5 0.003 132668 265336
Half-polished V05 7.5 0.003 153540 307080
Half-polished V03 7.5 0.003 163123 326246
Half-polished V13 8 0.0025 287061 574122
Half-polished V11 8 0.0025 291206 582412
Half-polished V17 8 0.0025 395801 791602
Half-polished V01 10 0.002 5000000 10000000""".split('\n')]
fatigue = pd.DataFrame(_[1:], columns=_[0])
#fatigue['Specimen ID'] = fatigue['Specimen ID']#.str.lower()
for c in ['Frequency (Hz)', 'Strain Amplitude (mm/mm)', 'Fatigue Life (cylces)', 'Reversal to failure (2Nf)']:
fatigue[c] = pd.to_numeric(fatigue[c])
fatigue['ispolished'] = fatigue['Surface Condition'].str.contains('polish')
fatigue
from additive.utility import dfe, get_file_info
files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/*.info")
# files = get_file_info(pd.Series(files)).merge(fatigue, on='specimen')['files']
# files = files[~files.str.contains('V13')]
#polished_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_*.info")
#asbuilt_files = [x.replace("Polished_", '') for x in polished_files]
# all_files = [ x for x in polished_files+asbuilt_files if os.path.exists(x)]
def min_max_scale(x, a=0, b=1):
    """Linearly rescale *x* so its minimum maps to *a* and maximum to *b*.

    Parameters
    ----------
    x : ndarray
        Input array (any shape).
    a, b : scalar, optional
        Target range endpoints.

    Returns
    -------
    ndarray
        Rescaled array.  A constant input (max == min) now returns an
        array filled with *a* instead of dividing by zero and producing
        NaNs.
    """
    mn, mx = x.min(), x.max()
    if mx == mn:
        # Degenerate case: avoid 0/0 -> NaN for constant images.
        return np.full_like(x, a, dtype=float)
    rng = b - a
    out = (x - mn)/(mx-mn)
    return out * rng + a
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x, feat_funs=None):
    """Evaluate every feature function on image *x* in parallel via dask.

    Parameters
    ----------
    x : ndarray
        Image to analyze.
    feat_funs : mapping of name -> callable, optional
        Feature functions to apply.  Defaults to the project's registered
        feature-function mapping.  Previously this parameter had no
        default, so the one-argument calls elsewhere in this script
        (read_file_equalize_and_get_stats, read_file_and_get_stats)
        raised TypeError.

    Returns
    -------
    dict mapping feature name -> value.
    """
    if feat_funs is None:
        # Same mapping the module imports at top level as `feat_funs`.
        from additive.feature_functions import feature_functions_functions as feat_funs
    funs = {k: v for k, v in feat_funs.items()}
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
from additive.preprocessing import correct_aligment
def read_file_equalize_and_get_stats(file):
    """Load a dumped image, histogram-equalize it, and compute its stats.

    The 500-px border is cropped before equalization.  `feat_funs` is now
    passed explicitly: image_stats requires a feature-function mapping as
    its second argument, so the previous one-argument call raised
    TypeError.
    """
    data = joblib.load(file)
    image = np.array(data['value'].x)[500:-500, 500:-500]
    # image = load_and_process_image(file, lambda x: x['value'].x, (500, 500))
    # image = correct_aligment(image, l=300)
    # Map the heights onto 0..255 uint8 so cv2.equalizeHist accepts them.
    scaled_image = min_max_scale(image, 0, 255).astype('uint8')
    equalized_image = cv2.equalizeHist(scaled_image)
    return image_stats(equalized_image, feat_funs)
def read_file_and_get_stats(file):
    """Load a dumped image (border-cropped, no equalization) and compute
    its stats.

    `feat_funs` is now passed explicitly: image_stats requires a
    feature-function mapping as its second argument, so the previous
    one-argument call raised TypeError.
    """
    data = joblib.load(file)
    image = np.array(data['value'].x)[500:-500, 500:-500]
    # image = load_and_process_image(file, lambda x: x['value'].x, (500, 500))
    # image = correct_aligment(image, l=300)
    return image_stats(image, feat_funs)
with Pool(8) as p:
# res = p.map(read_file_equalize_and_get_stats, files)
res = p.map(read_file_and_get_stats, files)
out = pd.DataFrame(res).assign(file=[dfe(x)[1] for x in files])
equalized_stats = get_file_info(out, 'file').drop('file', axis=1)
# equalized_stats.to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v01.csv")
files_to_drop = {'/home/ben_rasoolov/additive_project/data/experiment_03/V02_T1_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V02_T1_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V02_T2_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V02_T2_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V03_T2_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V03_T2_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V04_T1_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V04_T1_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V04_T2_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V04_T2_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V05_T1_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V05_T1_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V05_T2_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V05_T2_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V06_T1_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V06_T1_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V06_T2_L_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V06_T2_R_3d.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Left(Bottom)_500X_3D_2.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Right(Top)_500X_3D_2.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Left(Bottom)_500X_3D_2.info',
'/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Right(Top)_500X_3D_2.info'}
t = equalized_stats.assign(file=files)
cond = ~t.file.isin(files_to_drop)
t[cond].to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_no_transform_v01.csv")
cond = equalized_stats['ispolished']
polished_vs_unpolished = equalized_stats[cond]\
.merge(
equalized_stats[~cond],
on=['specimen', 'T', 'RL'],
suffixes = ['_polished', '_unpolished']
).sort_values(['specimen', 'T', 'RL'])
chosen = polished_vs_unpolished[['specimen', 'T', 'RL', 'rp_2d_polished', 'rp_2d_unpolished', 'rv_2d_polished', 'rv_2d_unpolished']]
tmp = chosen# [chosen['specimen'] != 'V13']
res = equalized_stats[equalized_stats['specimen'] != 'V13'].groupby(['ispolished', 'specimen']).median()\
.reset_index().merge(fatigue, on='specimen')
res['Reversal to failure (2Nf)'] = np.log(res['Reversal to failure (2Nf)'])
res = res.sort_values(['ispolished_x'], ascending=False).drop_duplicates('specimen')
# res = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/hist_equalized_global_stats_v02.csv")
res['Reversal to failure (2Nf)'] = np.exp(res['Reversal to failure (2Nf)'])
res.sort_values(['Reversal to failure (2Nf)'])
for strain in sorted(res['Strain Amplitude (mm/mm)'].unique()):
cond = res['Strain Amplitude (mm/mm)'] == strain
sns.scatterplot(data=res[cond], x='mode_1d', y='Reversal to failure (2Nf)', hue='ispolished_x')
plt.title(strain)
plt.show()
import re
from additive.utility import pick_cols
pick_cols(res, '(ispolished)|(Frequency)|(Fatigue)', reverse=True)\
.sort_values('Reversal to failure (2Nf)')\
#.to_csv( "/home/ben_rasoolov/additive_project/data/paper/data/hist_equalized_global_stats_v02.csv")
#cond = res['specimen'] > 'V06'
#x = res[['ra_1d']]
X = pick_cols(res, '(.*_.d)|(Strain)').values
y = np.log(res['Reversal to failure (2Nf)'].values)
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.model_selection import train_test_split
Xtrain, Xtest, ytrain, ytest = train_test_split(X, y, test_size=.2)
scaler = StandardScaler()
Xtrain = scaler.fit_transform(Xtrain)
from sklearn.ensemble import RandomForestRegressor
model = RandomForestRegressor(n_estimators=1000, n_jobs=10)
model.fit(Xtrain, ytrain)
#np.sqrt(((model.predict(Xtrain) - ytrain)**2).sum())
np.sqrt(((model.predict(Xtest) - ytest)**2).sum())
plt.scatter(model.predict(X), y)
from tensorflow import keras
model = keras.Sequential([
keras.Input(Xtrain.shape[1]),
keras.layers.Dense(50, activation='relu'),
keras.layers.Dropout(.3),
keras.layers.Dense(100, activation='relu'),
keras.layers.Dropout(.3),
keras.layers.Dense(50, activation='relu'),
keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse', )
model.fit(Xtrain, ytrain, epochs=10000, validation_split=.2,
callbacks=[keras.callbacks.EarlyStopping(patience=100, restore_best_weights=True)])
model.evaluate(scaler.transform(Xtest), ytest)
plt.scatter(y, model.predict(scaler.transform(X)))
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
# Load a matched polished / as-built pair of surface scans (V03 T1 Left).
polished_data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V03_T1_Left(Bottom)_500X_3D.info")
asbuilt_data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_Left(Bottom)_500X_3D.info")
polished_im = np.array(polished_data['value'].x)
asbuilt_im = np.array(asbuilt_data['value'].x)
polished_im.shape, asbuilt_im.shape
# Manual alignment offsets (found by eye): shift the as-built crop by (t, d).
d = 100
t = 50
a_pol = polished_im[2000:-2000, 2000:-2000]  # trim a 2000-px border
a_pol -= a_pol.mean()  # center heights at zero
w, h = a_pol.shape
a_asb = asbuilt_im[2000+t:2000+t+w, 2000+d:2000+d+h]  # same-size, shifted crop
a_asb -= a_asb.mean()
%matplotlib inline
# Side-by-side 2-D height maps: as-built (left) vs polished (right),
# each with its own colorbar; saved as a paper figure.
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol, cmap='jet')
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb, cmap='jet')
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
def min_max_scale(x, a=0, b=1):
    """Linearly rescale array ``x`` into the interval [a, b].

    Parameters
    ----------
    x : np.ndarray
        Input array, any shape.
    a, b : float
        Target range bounds (default [0, 1]).

    Returns
    -------
    np.ndarray
        Same shape as ``x`` with min -> a and max -> b.  A constant input
        maps to ``a`` everywhere (the original divided by zero and
        produced NaNs in that case).
    """
    mn, mx = x.min(), x.max()
    span = mx - mn
    if span == 0:
        # Degenerate input: all values identical; avoid 0/0.
        return np.full_like(x, a, dtype=float)
    out = (x - mn) / span
    return out * (b - a) + a
# Histogram-equalize both crops (equalizeHist requires uint8, hence the 0-255 scale).
a_pol_equalized = cv2.equalizeHist(min_max_scale(a_pol, 0, 255).astype('uint8'))
# a_pol_equalized = min_max_scale(a_pol_equalized, 0, a_pol.max()-a_pol.min())
a_asb_equalized = cv2.equalizeHist(min_max_scale(a_asb, 0, 255).astype('uint8'))
# a_asb_equalized = min_max_scale(a_asb_equalized, 0, a_asb.max()-a_asb.min())
%matplotlib inline
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol_equalized)
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb_equalized)
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
#plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x):
    """Compute every 2-D feature statistic for image ``x``.

    Only registry entries whose name contains '_2d' are evaluated; they
    are run through dask delayed/compute and returned as {name: value}.
    """
    funs = {k: v for k, v in feat_funs.items() if '_2d' in k}
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
image_stats(a_pol), image_stats(a_asb)
image_stats(a_pol_equalized), image_stats(a_asb_equalized)
# Compare raw height histograms of the polished and as-built crops, with a
# vertical line marking each distribution's mean.
plt.hist(a_pol.reshape(-1), alpha=.2)
plt.plot([a_pol.mean(), a_pol.mean()], [0, 4e6])
plt.hist(a_asb.reshape(-1), alpha=.2)
# Fix: the original drew [a_asb.mean(), a_pol.mean()], producing a slanted
# line mixing the two means; the as-built marker should be vertical.
plt.plot([a_asb.mean(), a_asb.mean()], [0, 4e6])
plt.hist(a_pol_equalized.reshape(-1), alpha=.2)
plt.hist(a_asb_equalized.reshape(-1), alpha=.2)
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
polished_data = joblib.load("../data/experiment_03/Polished_V03_T1_Left(Bottom)_500X_3D.info")
asbuilt_data = joblib.load("../data/experiment_03/V03_T1_Left(Bottom)_500X_3D.info")
polished_im = np.array(polished_data['value'].x)
asbuilt_im = np.array(asbuilt_data)
polished_im.shape, asbuilt_im.shape
d = 100
t = 50
a_pol = polished_im[2000:-2000, 2000:-2000]
a_pol -= a_pol.mean()
w, h = a_pol.shape
a_asb = asbuilt_im[2000+t:2000+t+w, 2000+d:2000+d+h]
a_asb -= a_asb.mean()
%matplotlib inline
# Same 2-panel height-map figure as before, saved to the relative path.
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol, cmap='jet')
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb, cmap='jet')
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
plt.savefig("../data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
from additive.experimental import plot_3d_surface
%matplotlib notebook
fig, axes = plt.subplots(figsize=(8, 8))
# NOTE(review): fig.gca(projection=...) is deprecated in modern matplotlib;
# confirm the pinned version still supports it.
ax = fig.gca(projection='3d')
# Overlay both surfaces; the as-built one is lifted by +200 for visibility.
plot_3d_surface(polished_im[2000:-2000, 2000:-2000], step=30, ax=ax, cmap='jet')
plot_3d_surface(asbuilt_im[2000+t:-2000+t, 2000+d:-2000+d]+200, step=30, ax=ax, cmap='jet')
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_3d.png", dpi=300)
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
# Demonstrate the tilt-alignment step: rotate the scan by -1.5 degrees and
# back by +1.5 to produce the "before/after alignment" paper figure.
data = joblib.load("../data/experiment_03/V01_T2_Left(Bottom)_500X_3D.info")
image = np.array(data['value'].x)
from scipy.ndimage import rotate
angle = 1.5
rotated = rotate(image, -angle)
double_rotated = rotate(rotated, angle)
fig, axes = plt.subplots(1, 2, figsize=(16, 10))
axes[0].imshow(rotated[150:-180, 500:-500], cmap='jet')
axes[1].imshow(double_rotated[150:-180, 500:-500], cmap='jet')
axes[0].set_title("Before alignment")
axes[1].set_title("After alignment")
plt.savefig("../data/paper/figures/before_after_alignment.png", dpi=300)
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
from additive.preprocessing import load_and_process_image
# Compare mode statistics of a polished scan against its as-built twin
# (V17 T1 Left) after the standard preprocessing/crop.
data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info")
image = load_and_process_image(np.array(data['value'].x), crop_size=[1000, 300])
image.shape
from scipy.stats import mode
modes = mode(image, axis=1)  # row-wise modes
print(mode(image.reshape(-1)))  # global mode of all pixels
print(modes.mode.mean())  # average of the row-wise modes
image.mean()
from additive.preprocessing import load_and_process_image
data2 = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info")
image2 = load_and_process_image(np.array(data2['value'].x), crop_size=[1000, 300])
image2.shape
modes2 = mode(image2, axis=1)
print(mode(image2.reshape(-1)))
print(modes2.mode.mean())
image2.mean()
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
from functools import reduce
from operator import or_
def isin(values, text, ignore_case=False):
    """Return True if any of ``values`` occurs as a substring of ``text``.

    Parameters
    ----------
    values : iterable of str
        Candidate substrings.
    text : str
        String to search in.
    ignore_case : bool
        Compare case-insensitively when True.

    Returns
    -------
    bool
        True if at least one value is contained in ``text``.  An empty
        ``values`` returns False (the original reduce(or_, []) raised
        TypeError there).
    """
    if ignore_case:
        haystack = text.lower()  # hoisted: original lowered text once per value
        return any(v.lower() in haystack for v in values)
    return any(v in text for v in values)
################### ################### ################### ###################
## version 1: all files
files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/*info")
chosen_files = [f for f in files if isin(['v18', 'v12', 'v14', 'v13', 'v11', 'v17'], f, ignore_case=True)]
print(len(chosen_files))
## version 2: v19 repeated measurements
################### ################### ################### ###################
#info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
#chosen_files = info_files + [x.replace("_2", "") for x in info_files]
#len(chosen_files)
def get_features(img, features=None):
    """Evaluate feature functions on ``img``.

    Parameters
    ----------
    img : array-like
        Preprocessed surface image.
    features : list of str, optional
        When given, only feature names in this list are computed;
        otherwise every function in ``feature_funs`` runs.

    Returns
    -------
    dict
        Feature name -> computed value.
    """
    if features is None:
        return {feature: fun(img) for feature, fun in feature_funs.items()}
    return {feature: fun(img) for feature, fun in feature_funs.items() if feature in features}
from multiprocessing import Pool
import dask
def get_features_from_files(chosen_files, n_procs=8):
    """Load, preprocess and featurize each file in parallel.

    Builds a dask bag pipeline (load -> preprocess -> mode features) and
    evaluates it on a multiprocessing pool of at most ``n_procs`` workers.
    Returns a list of per-file feature dicts in input order.
    """
    images = bag.from_sequence(chosen_files)
    preprocessed_images = images.map(load_and_process_image,
                                     transform_fun=lambda data: np.array(data['value'].x), crop_size=(1000, 1000))
    features = preprocessed_images.map(get_features, features=['mode_1d', 'mode_2d'])
    with Pool(min(len(chosen_files), n_procs)) as p:
        with dask.config.set(pool=p):
            out = features.compute()
    return out
out = get_features_from_files(chosen_files)
import pandas as pd
from additive.utility import get_file_info
# Rows = files, columns = features; strip directory and extension from index.
df = pd.DataFrame(dict(zip(chosen_files, out))).T
df.index = df.index.map(lambda x: x.split("/")[-1].split(".")[0]).rename("file")
df = df.reset_index()
df = get_file_info(df['file']).join(df)
df
# orig = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv")
# df4 = orig.drop(['mode_1d', 'mode_2d'], axis=1).merge(df, on=['ispolished', 'specimen', 'T', 'RL'])
# df4.to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv", index=False)
# NOTE(review): ``orig`` is used here one line before it is assigned below —
# notebook cells were evidently executed out of order.
orig[orig.ispolished & orig['specimen'].isin({'V11', 'V13', 'V17'})].groupby(['specimen']).max()
orig = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv")
cols_to_drop = [x for x in orig.columns if x.endswith('_2d')] + ['file_x', 'file_y']
tmp = orig.drop(cols_to_drop, axis=1)
# Join polished rows (_x suffix) to their as-built counterparts (_y).
res = tmp[tmp['ispolished']].merge(tmp[~tmp['ispolished']], on=['specimen', 'T', 'RL'])\
    .drop(['ispolished_x', 'ispolished_y'], axis=1)
res
# NOTE(review): 'rv_1d_y' appears twice in this list — one occurrence was
# probably meant to be a different column.
y_cols = cols = "ra_1d_y rq_1d_y rsk_1d_y rku_1d_y rp_1d_y rv_1d_y mode_1d_y rv_1d_y".split()
tmp = res[y_cols].drop(10)  # drop row with index 10
tmp
x_cols = 'ra_1d rq_1d rsk_1d rku_1d rp_1d mode_1d'.split()
y_cols = ['rv_1d']
# NOTE(review): after the _x/_y merge these unsuffixed names may not exist
# in ``tmp`` — confirm which frame this cell actually ran against.
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import Ridge
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import PolynomialFeatures
from sklearn.model_selection import train_test_split
x = tmp[x_cols].values
y = tmp[y_cols].values
xtrain, xtest, ytrain, ytest = train_test_split(x, y, test_size=.2)
import tensorflow as tf
import tensorflow.keras as keras
# Deeper MLP attempting to predict the target roughness column from the
# selected 1-D features.
model = keras.models.Sequential([
    keras.layers.Input([x.shape[-1]]),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(100, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse')
#model = MLPRegressor(2000, )
model.fit(xtrain, ytrain, epochs=10000, validation_split=.1,
          callbacks=[keras.callbacks.EarlyStopping(patience=1000, restore_best_weights=True)])
np.sqrt(((model.predict(xtrain) - ytrain)**2).sum())  # train error (summed, not averaged)
#np.sqrt(((model.predict(xtest) - ytest)**2).sum())
pd.DataFrame(np.concatenate([model.predict(xtest) , ytest], axis=1))  # predicted vs actual
from scipy.stats import mode
fig, axes = plt.subplots(1, 1, figsize=(25, 5))
# Take one as-built line profile and synthesize a "polished" version by
# compressing everything above the 60th percentile toward it.
profile = image[3000][100:2500].copy()
plt.plot(profile, label="as-built", linewidth=3, linestyle='--', color='blue')
plt.plot([0, len(profile)], [profile.mean()]*2, label='as-built mean', linewidth=3, linestyle='dotted', color='blue')
mode1 = mode(profile[profile<profile.mean()].round(decimals=0)).mode  # mode of below-mean (valley) heights
q3 = np.percentile(profile, 60)
cond = profile>q3
profile[cond] = q3 +(profile[cond] - q3)*.5  # halve peak heights above q3
plt.plot(profile, label='polished', linewidth=3, color='orange')
plt.plot([0, len(profile)], [profile.mean()]*2, label='polished mean', linewidth=3, linestyle='dotted', color='orange')
#mode2 = mode(profile[~cond].round(decimals=0)).mode
mode2 = mode(profile[profile<profile.mean()].round(decimals=0)).mode
print(mode1, mode2)
# The valley mode is (nearly) unchanged by the simulated polishing, unlike
# the mean — the point this figure makes.
plt.plot([0, len(profile)], [mode1]*2, label="as-built and polished mode", linewidth=3, linestyle='-.')
plt.legend()
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/mode_vs_mean.png", dpi=300)
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
from functools import reduce
from operator import or_
# Compute mode / valley-depth / kurtosis statistics for the V15 T1 Left pair.
data_polished = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V15_T1_Left(Bottom)_500X_3D.info")
x_polished = np.array(data_polished['value'].x)[1000:-1000, 1000:-1000].round(decimals=0)
from scipy.stats import mode
modes = mode(x_polished)
# Mode height relative to the mean (candidate polish-sensitive feature).
mode_polished = modes.mode.mean() - x_polished.mean()
#mode_polished = (np.percentile(x_polished, 90, axis=1)-x_polished.mean(axis=1)).mean()
rv_polished = (x_polished.min(axis=1)-x_polished.mean(axis=1)).mean()  # mean valley depth (Rv-like)
data_asbuilt = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V15_T1_Left(Bottom)_500X_3D.info")
x_asbuilt = np.array(data_asbuilt['value'].x)[1000:-1000, 1000:-1000].round(decimals=0)
modes = mode(x_asbuilt)
mode_asbuilt = modes.mode.mean() - x_asbuilt.mean()
#mode_asbuilt = (np.percentile(x_asbuilt, 90, axis=1)-x_asbuilt.mean(axis=1)).mean()
rv_asbuilt = (x_asbuilt.min(axis=1)-x_asbuilt.mean(axis=1)).mean()
rku_polished = feature_funs['rku_2d'](x_polished)  # Rku (kurtosis) feature
rku_asbuilt = feature_funs['rku_2d'](x_asbuilt)
rku_asbuilt, rku_polished
mode_asbuilt, mode_polished
rv_asbuilt, rv_polished
rv_asbuilt-rku_asbuilt*mode_asbuilt, rv_polished-rku_polished*mode_polished  # trial combined metric
rv_asbuilt/x_asbuilt.std(), rv_polished/x_polished.std()  # std-normalized valley depth
# Compare standardized height distributions of the polished and as-built
# crops, with vertical markers at each distribution's mode and mean.
mode1 = mode(x_polished).mode.mean()
mu1 = x_polished.mean()
mode2 = mode(x_asbuilt).mode.mean()
mu2 = x_asbuilt.mean()
x1 = (x_polished.reshape(-1)-x_polished.mean())/x_polished.std()
x2 = (x_asbuilt.reshape(-1)-x_asbuilt.mean())/x_asbuilt.std()
fig, ax = plt.subplots(figsize=(10, 6))
ax.hist(x1, alpha=.5, label='polished', density=False)
ax.hist(x2, alpha=.5, label='as-built', density=False)
ax.plot([mode1-mu1, mode1-mu1], [0, .025], label='polished mode')
ax.plot([0, 0], [0, .025], label='polished mean')
# Fix: these two series plot the AS-BUILT statistics but were labelled
# 'polished mode' / 'polished mean' in the original, duplicating the legend.
ax.plot([mode2-mu2, mode2-mu2], [0, .025], label='as-built mode')
ax.plot([0, 0], [0, .025], label='as-built mean')
ax.set_xlabel('Height')
ax.set_ylabel('Frequency')
ax.legend()
def get_stats(x):
    """Compute mean, median and (mean of scipy) mode of ``x`` in parallel
    via dask delayed/compute."""
    functions = [np.mean, np.median, lambda x: mode(x).mode.mean()]
    # NOTE(review): compute() is handed a generator here; confirm dask
    # traverses it as intended (a list would be unambiguous).
    return compute(delayed(f)(x) for f in functions)
get_stats(x_asbuilt)
get_stats(x_polished)
500*20
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
# Box download links for the "morePolished" V07 scans.
box_files = {
    "morePolished_V07_T1_Left(Bottom)_500X_3D.csv": 'https://auburn.box.com/shared/static/fmdgsitx225kiz856uzmmdxlo30pepyi.csv',
    "morePolished_V07_T1_Right(Top)_500X_3D.csv": "https://auburn.box.com/shared/static/xgoav7ttot5kgsgoul1ee2p5cy5vuct9.csv",
    "morePolished_V07_T2_Right(Top)_500X_3D.csv": "https://auburn.box.com/shared/static/7t3g815aioj4zwkzl33jx1tsivvd8cm6.csv",
    "morePolished_V07_T2_Left(Bottom)_500X_3D.csv": "https://auburn.box.com/shared/static/cy4ny0h4kzs1322htsfh7y35h78rd7uu.csv",
}
from additive.utility import download_from_dict
# download_from_dict(box_files, "/home/ben_rasoolov/additive_project/data/original_images/")
files = glob.glob("/data/additive_project/data/original_images/morePolished_*csv")
files
from additive.utility import extract_array_from_csv
with Pool(4) as p:
    out = p.map(extract_array_from_csv, files)
from additive.utility import image_rescale
images_d = image_rescale(files)  # delayed/dask collection
with Pool(4) as p:
    with dask.config.set(pool=p):
        images = images_d.compute()
# NOTE(review): ``files`` is re-globbed here with a *pd pattern under a
# different root, then zipped with ``images`` computed from the csv glob
# above — verify the two lists actually correspond one-to-one.
files = glob.glob("/home/ben_rasoolov/additive_project/data/original_images/morePolished_*pd")
files
from additive.utility import dfe
from collections import namedtuple
ImageInfo = namedtuple("ImageInfo", "x")
root = "/home/ben_rasoolov/additive_project/data/experiment_04/"
# Re-serialize each rescaled image in the project's .info layout.
for file, image in zip(files, images):
    d, f, e = dfe(file)
    new_path = root + f + ".info"
    joblib.dump({'value': ImageInfo(image)}, new_path)
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
from additive.features import ImageInfo, Features
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
files = glob.glob("../data/experiment_04/*info")
files
# Shadows the imported ImageInfo so joblib can unpickle the .info files.
ImageInfo = namedtuple("ImageInfo", "x")
def load_images(x):
    """Load one .info file and return the raw image stored under ['value'].x."""
    return joblib.load(x)['value'].x
with Pool(6) as p:
    images = p.map(load_images, files)
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x):
    """Compute ALL registered feature statistics for image ``x`` in parallel
    via dask delayed/compute; returns {name: value}."""
    funs = {k: v for k, v in feat_funs.items()}  # shallow copy of the registry
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
# Trim a 500-px border from every image before featurizing.
res = [image_stats(x[500:-500, 500:-500]) for x in images]
pd.DataFrame(res).assign(file=[x.split("/")[-1] for x in files])
def min_max_scale(x, a=0, b=1):
    """Map the values of ``x`` affinely so that its minimum becomes ``a``
    and its maximum becomes ``b`` (defaults give the unit interval)."""
    lo = x.min()
    hi = x.max()
    normalized = (x - lo) / (hi - lo)
    return a + normalized * (b - a)
def get_equalized_stats(image):
    """Histogram-equalize ``image`` (after uint8 min-max scaling, as
    required by cv2.equalizeHist) and return its feature statistics."""
    scaled_image = min_max_scale(image, 0, 255).astype('uint8')
    equalized_image = cv2.equalizeHist(scaled_image)
    return image_stats(equalized_image)
equalized_res = compute([delayed(get_equalized_stats)(x[500:-500, 500:-500]) for x in images])[0]
pd.DataFrame(equalized_res).assign(file=[x.split("/")[-1] for x in files])[['rp_1d', 'rv_1d', 'mode_1d', 'file']]\
    .sort_values(['mode_1d'])
# Pick the as-built / polished / more-polished versions of V07 T1 Left.
dd = dict(zip([x.split("/")[-1] for x in files], images))
im1 = dd['V07_T1_Left(Bottom)_500X_3D.info']
im2 = dd['Polished_V07_T1_Left(Bottom)_500X_3D.info']
im3 = dd['morePolished_V07_T1_Left(Bottom)_500X_3D.info']
ims = [im1, im2, im3]
names = ['asbuilt', 'polished', 'more polished']
# Per-image manual alignment offsets (found by eye).
dxs = [-450, -430, -600]
dys = [650, 1620, 720]
fig, axes = plt.subplots(1, 3, figsize=(18, 6))
sub_ims = []
for name, im, ax, dx, dy in zip(names, ims, axes, dxs, dys):
    print(im.shape)
    sub_im = im[1000+dx:2000+dx, 1000+dy:2000+dy]
    ax.imshow(sub_im, cmap='jet')
    sub_ims.append(sub_im)
    ax.set_title(name)
plt.savefig("../data/paper/figures/polishedVsHalfVsMorePolished3D.png", dpi=300)
import matplotlib as mpl
mpl.rcParams.update(mpl.rcParamsDefault)  # reset any notebook style tweaks
font = {'size' : 13}
matplotlib.rc('font', **font)
fig, axes = plt.subplots(1, 1, figsize=(20, 4))
N = 1000
# Overlay one line profile from each polish level, nudged by (dx, dy) so
# the surface features line up visually.
for name, sub_im, dx, dy in zip(names, sub_ims, [-20, 10, 30], [0, 45, 60]):
    x = np.arange(sub_im.shape[1])
    plt.plot(x+dx, sub_im[500]-sub_im.min(), label=name, linewidth=3, alpha=.8)
plt.legend(loc='upper left')
plt.savefig("../data/paper/figures/polishedVsHalfVsMorePolished.png", dpi=300)
from additive.experimental import get_best_template_match, get_top_left_best_template_match, equalize_hist
# NOTE(review): ``methods`` (cv2 template-matching constants) is defined
# further DOWN in this file — relies on out-of-order cell execution.
template = ims[0][1000:-1000, 1000:-1000]
out1 = get_best_template_match(ims[1], template, methods[1])
out2 = get_best_template_match(ims[2], template, methods[1])
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
s = slice(1500, 2000)
axes[0].imshow(template[s,s])
axes[1].imshow(out1[s, s])
axes[2].imshow(out2[s, s])
# Locate the same 5500x5500 patch in the polished / more-polished images.
w, h = 5500, 5500
l0 = 1000, 1000
template = ims[0][l0[0]:l0[0]+w, l0[1]:l0[1]+h]
l1 = get_top_left_best_template_match(ims[1], template, methods[3])
l2 = get_top_left_best_template_match(ims[2], template, methods[3])
l0, l1, l2
l0, l1, l2
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
profiles = []
# Show the matched 500x500 patches side by side.
for ax, im, (x, y), label in zip(axes, ims, [l0, l1, l2], ['asbuilt', 'polished', 'more polished']):
    ax.imshow(im[x:x+500, y:y+500])
plt.subplots(figsize=(15, 5))
profiles = []
# Extract one aligned line profile per polish level and overlay them.
for im, (x, y), label in zip(ims, [l0, l1, l2], ['asbuilt', 'polished', 'more polished']):
    #plt.imshow(im[x:x+500, y:y+500])
    # profile = equalize_hist(im[x:x+500, y:y+2000])[200]
    profile = im[x:x+500, y:y+2000][200].round(decimals=0)
    plt.plot(profile-profile.min(), label=label)
    profiles.append(profile)
plt.legend()
from additive.experimental import extract_circles
profile = profiles[0]
circles = []
# Fit circles of radius 100 along each profile (valley-geometry metric).
for profile in profiles:
    circles.append(extract_circles(np.arange(len(profile)), profile, 100))
from scipy.stats import mode
[mode(profile).mode for profile in profiles]
[profile.mean()-profile.min() for profile in profiles]
pd.concat([pd.DataFrame(circle[-1]).mean() for circle in circles], axis=1).round(decimals=2)
from additive.experimental import get_cut_points
# Average spacing between crossings of the mode level, per profile.
[len(profile)/len(get_cut_points(profile, mode(profile).mode[0])) for profile in profiles]
[len(profile)/len(get_cut_points(profile, np.percentile(profile, 30))) for profile in profiles]
from additive.experimental import get_ratio_under_thresh
[get_ratio_under_thresh(profile, np.percentile(profile, 15)) for profile in profiles]
# Fraction of pixels within t of each image's minimum, swept over thresholds.
tmp = [[(im<np.min(im)+t).sum()/(im.shape[0]*im.shape[1]) for im in ims] for t in range(10, 200, 10)]
plt.plot(tmp)
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
# All six OpenCV template-matching scoring methods.
methods = [cv2.TM_CCOEFF, cv2.TM_CCOEFF_NORMED, cv2.TM_CCORR,
           cv2.TM_CCORR_NORMED, cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]
# Pair every polished scan with its as-built counterpart (same filename
# minus the 'Polished_' prefix) and verify each counterpart exists.
polished_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/Polished*")
unpolished_files = [x.replace('Polished_', '') for x in polished_files]
assert len([x for x in unpolished_files if not os.path.exists(x)]) == 0
assert len(polished_files) == len(unpolished_files)
len(polished_files)
from additive.experimental import match_asbuilt_unpolished, get_image_from_top_left, image_correlation
# Spot-check the matcher on a single pair.
index = 30
as_built = np.array(joblib.load(unpolished_files[index])['value'].x)
polished = np.array(joblib.load(polished_files[index])['value'].x)
print(unpolished_files[index])
r1, r2 = match_asbuilt_unpolished(as_built, polished, .7)
plt.imshow(get_image_from_top_left(as_built, r1, (1000, 1000))[:3000, :3000])
plt.show()
plt.imshow(get_image_from_top_left(polished, r2, (1000, 1000))[:3000, :3000])
def load_image(x):
    """Load a .info file and return its image as a numpy array."""
    return np.array(joblib.load(x)['value'].x)
# Run the matcher over every pair with ratio=.5 on a 10-process pool.
polished_files_b = bag.from_sequence(polished_files).map(load_image)
asbuilt_files_b = bag.from_sequence(unpolished_files).map(load_image)
top_left_matches_b = bag.map(match_asbuilt_unpolished, asbuilt_files_b, polished_files_b, ratio=.5)
with Pool(10) as p:
    with dask.config.set(pool=p):
        top_left_matches_5 = top_left_matches_b.compute()
# Cross-validate the matches obtained with three different ratios:
# accept a result when any two of them agree to within 50 px.
# NOTE(review): top_left_matches_3 and top_left_matches_7 are not defined
# in this file as shown — presumably from earlier runs of the cell above
# with ratio=.3 / .7.
top_left_matches = []
for (file, x, y, z) in zip(unpolished_files, top_left_matches_3, top_left_matches_5, top_left_matches_7):
    file = file.split('/')[-1]
    error1 = np.sqrt(((np.array(x)-np.array(y))**2).sum())
    error2 = np.sqrt(((np.array(x)-np.array(z))**2).sum())
    error3 = np.sqrt(((np.array(y)-np.array(z))**2).sum())
    if error1 < 50:
        res = x
        error = error1
    elif error2 < 50:
        res = x
        error = error2
    elif error3 < 50:
        res = y
        error = error3
    else:
        res = None
        print(res)
    top_left_matches.append(res)
    # if error > 40:
    # print(file, error)
    # print(file, error)
# NOTE(review): the first dump passes unpolished_files TWICE — the second
# element was probably meant to be polished_files. Also ``correlations``
# below is only assigned later in this file.
joblib.dump((unpolished_files, unpolished_files, top_left_matches_3, top_left_matches_5, top_left_matches_7),
            "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.tuple")
joblib.dump((list(unpolished_files), list(polished_files), top_left_matches, correlations),
            "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
import random
polished_vs_asbuilt_top_left_matches = joblib.load(
    "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
# polished_vs_asbuilt_top_left_matches = zip(*polished_vs_asbuilt_top_left_matches)
dxy = (500, 500)
correlations = []
# Visually verify one matched pair; the loop breaks after the first valid
# entry, so the commented correlation bookkeeping never runs.
for as_file, po_file, val, *_ in zip(*polished_vs_asbuilt_top_left_matches):
    print(as_file)
    if val is None:
        correlations.append(-1)  # sentinel for pairs that failed to match
        continue
    as_tl, po_tl = val
    asbuilt = load_image(as_file)
    polished = load_image(po_file)
    # [:2000, 2000] selects column 2000 as a 1-D profile for the line plots.
    x = get_image_from_top_left(asbuilt, as_tl, dxy)[:2000, 2000]
    y = get_image_from_top_left(polished, po_tl, dxy)[:2000, 2000]
    plt.plot(x)
    plt.plot(y)
    break
    # correlation = image_correlation(x, y)
    # correlations.append(correlation)
    # print(correlation)
tmp = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
# Tabulate pairs sorted by correlation and inspect the mid-quality band.
res = pd.DataFrame(sorted(zip(*tmp), key=lambda x: x[-1]), columns=['asbuilt', 'polished', 'top_left_match', 'correlation'])
res[(res['correlation']<1) & (res['correlation']>.21)]
# Spot-check a random offset within the matched region.
dxy = np.random.randint(4000, size=2)
plt.imshow(get_image_from_top_left(as_built, as_tl, dxy)[:1000, :1000])
plt.show()
plt.imshow(get_image_from_top_left(polished, po_tl, dxy)[:1000, :1000])
match_asbuilt_unpolished(asbuilt, polished, .7)
match_asbuilt_unpolished(polished, asbuilt, .8)
# Display the V17 T1 Left pair (border-trimmed) at full size.
data = joblib.load("../data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info")
image = np.array(data['value'].x)
data_ab = joblib.load("../data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info")
image_ab = np.array(data_ab['value'].x)
plt.subplots(figsize=(20, 20))
plt.imshow(image_ab[500:-500], cmap='jet')
plt.subplots(figsize=(20, 20))
plt.imshow(image[500:-500], cmap='jet')
# Per-group feature means for specimen V01 from the equalized-stats table.
df = pd.read_csv("../data/paper/data/global_stats_equalize_hist_v02.csv")
df[df['specimen']=='V01'].groupby(['ispolished']).mean()
def get_v01(path):
    """Read the CSV at ``path`` and return only the rows for specimen V01."""
    frame = pd.read_csv(path)
    mask = frame['specimen'] == 'V01'
    return frame[mask]  # optionally: .groupby(['ispolished']).mean()
files = glob.glob("../data/experiment_03/V01*")
from additive.feature_functions import *
# out_b = bag.from_sequence(files).map(lambda x: np.array(joblib.load(x)['value'].x)[500:-500])
# NOTE(review): ``out_b`` is only defined in the commented line above — this
# compute() NameErrors unless that cell was executed previously.
with Pool(5) as pool:
    with dask.config.set(pool=pool):
        out = out_b.compute()
list(map(mode_2d, out))
list(map(mode_1d, out))
pd.read_csv("../data/paper/global_stats_tilted_rotated_cropped_v02.csv")
%load_ext autoreload
%autoreload 2
from imports import *
# p = joblib.load('../data/experiment_03/Polished_V03_T1_Right(Top)_500X_3D.info')
p = joblib.load('../data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info')
# a = joblib.load("../data/experiment_03/V03_T1_Right(Top)_500X_3D.info")
a = joblib.load("../data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info")
pol = np.array(p['value'].x)[1000:-1000]
# Some files store the raw array, others the {'value': ImageInfo} wrapper.
try: asb = a[1000:-1000]
except: asb = np.array(a['value'].x[1000:-1000])
from additive.experimental import match_asbuilt_unpolished, get_image_from_top_left
r1, r2 = match_asbuilt_unpolished(asb, pol, .4)
r1, r2, asb.shape, pol.shape
index = 0#np.random.randint(0, 1000)
size = 3000
asub = get_image_from_top_left(asb, r1, (1000, 1000))[index:index+size, index:index+size]
psub = get_image_from_top_left(pol, r2, (1000, 1000))[index:index+size, index:index+size]
from scipy.stats import pearsonr
pearsonr(asub.reshape(-1), psub.reshape(-1))  # alignment sanity check
fig, axes = plt.subplots(1, 2, figsize=(10, 5))
for ax, img in zip(axes, [asub, psub]):
    ax.imshow(img)
    ax.grid(False)
plt.subplots(figsize=(10, 5))
# Mean-centered height histograms with mode markers — the paper figure.
ahist = plt.hist(asub.reshape(-1)-asub.mean(), bins=55, alpha=.9, label="As-built Histogram",
                 density=True, linewidth=1, edgecolor=None, color="#F08080")
phist = plt.hist(psub.reshape(-1)-psub.mean(), bins=55, alpha=.7, label="Polished Histogram",
                 density=True, edgecolor=None, color='#008B8B')
# NOTE(review): get_mode is defined AFTER this cell in the file — relies on
# out-of-order notebook execution.
amode = get_mode(ahist)
pmode = get_mode(phist)
height = phist[0].max()
plt.vlines(pmode, 0, height, linestyle='--', label="Polished Mode")
plt.vlines(amode, 0, height, linestyle='dotted', label="As-built Mode")
plt.xlabel("Adjusted height ($v_i - \mu_i$)")
plt.ylabel("Relative Frequency")
plt.legend()
plt.savefig("../data/paper/figures/v17_t2_left_histogram_comparison.png", dpi=300)
def get_mode(hist_res):
    """Given a histogram result (counts, bin_edges, ...) as returned by
    plt.hist or np.histogram, return the left edge of the tallest bin."""
    counts = hist_res[0]
    edges = hist_res[1]
    peak = np.argmax(counts)
    return edges[peak]
get_mode(phist)
ahist
# Export per-group means as the paper's Table 2.
df = pd.read_csv("../data/paper/data/global_stats_no_transform_v02.csv").drop('Unnamed: 0', axis=1)
df.groupby(['ispolished', 'specimen']).mean().to_csv("../data/paper/data/table_02_v01.csv")#.filter(regex=".*2d")
# Sanity-check the various mode estimators against synthetic normal data.
from scipy.stats import norm
x = norm.rvs(100, 100, size=(50, 7000))
freq, val = np.histogram(x, bins=100, density=True, )
val[np.argmax(freq)]  # histogram-based mode
rng = ((x.max() - x.min())/50).round(decimals=2)  # bin width for 50 bins
from additive.feature_functions import mode_1d, mode
mode(x.round(2) // rng).mode.mean() * rng  # binned scipy mode
mode_1d(x), mode_2d(x, 50)
mode(x.round(0).reshape(-1))
freq, val, *_ = plt.hist(x.reshape(-1), bins=50)
val[np.argmax(freq)]
freq, val, *_ = np.histogram(x.reshape(-1), bins=50)
val[np.argmax(freq)]
%load_ext autoreload
%autoreload 2
from imports import *
files = glob.glob("/data/additive_project/data/experiment_03/*info")
# client = Client(scheduler_file="/data/additive_project/jupyter/projects/schedfile")
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x, feat_funs):
    """Evaluate every feature function in ``feat_funs`` on image ``x`` in
    parallel via dask delayed/compute; returns {name: value}."""
    funs = {k: v for k, v in feat_funs.items()}  # defensive shallow copy
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
from additive.preprocessing import correct_aligment, process_image
def read_file_and_get_stats(file):
    """Load a serialized image file and compute its global feature statistics.

    Parameters
    ----------
    file : str
        Path to a joblib-serialized image; the payload is either a dict
        with a ``'value'`` namedtuple exposing ``.x``, or a raw array.

    Returns
    -------
    dict
        Feature name -> value, as produced by ``image_stats``.
    """
    print(file)
    data = joblib.load(file)
    # Files come in two layouts; fall back to treating the payload as a raw
    # array when the {'value': ImageInfo(x)} wrapper is absent.
    # Fix: the original used a bare ``except:``, which also swallows
    # KeyboardInterrupt/SystemExit; narrowed to Exception.
    try:
        data = np.array(data['value'].x)
    except Exception:
        data = np.array(data)
    # Trim a 500-pixel border before computing statistics.
    image = data[500:-500, 500:-500]
    # image = process_image(image,)
    return image_stats(image, feat_funs)
# Featurize every experiment_03 file on a 2-process pool and write the
# global stats table to CSV.
with Pool(2) as p:
    with dask.config.set(pool=p):
        res = bag.from_sequence(files).map(read_file_and_get_stats).compute()
out = pd.DataFrame(res).assign(file=files)
from additive.utility import *
get_file_info(out, 'file').to_csv("../data/paper/data/global_stats_v03.csv", index=False)